For affected CPUs, this approach enables the mitigation during EL3
initialization, following every PE reset. No mechanism is provided to
disable the mitigation at runtime.
This approach permanently mitigates the vulnerability for the entire
software stack; no additional mitigation code is required in other
software components.
TF-A implements this approach for the following affected CPUs:
* Cortex-A57 and Cortex-A72, by setting bit 55 (Disable load pass store) of
`CPUACTLR_EL1` (`S3_1_C15_C2_0`).
* Cortex-A73, by setting bit 3 of `S3_0_C15_C0_0` (not documented in the
Technical Reference Manual (TRM)).
* Cortex-A75, by setting bit 35 (reserved in TRM) of `CPUACTLR_EL1`
(`S3_0_C15_C1_0`).
Additionally, a new SMC interface is implemented to allow software
executing in lower ELs to discover whether the system is mitigated
against CVE-2018-3639.
Refer to "Firmware interfaces for mitigating cache speculation
vulnerabilities System Software on Arm Systems"[0] for more
information.
[0] https://developer.arm.com/cache-speculation-vulnerability-firmware-specification
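
For illustration only, the discovery flow from a lower EL could look like
the standalone sketch below. This is not TF-A code: smccc_arch_features()
is a hypothetical stand-in for a real SMCCC v1.1 SMC/HVC conduit, and here
it simply returns what this patch's feature handler reports
(SMCCC_ARCH_NOT_REQUIRED).

#include <stdint.h>
#include <stdio.h>

#define SMCCC_ARCH_FEATURES		0x80000001U
#define SMCCC_ARCH_WORKAROUND_2		0x80007FFFU
#define SMCCC_ARCH_NOT_REQUIRED		(-2)

/* Hypothetical stand-in for the OS's SMC/HVC conduit; a real caller
 * would invoke SMCCC_ARCH_FEATURES, passing the function ID to query. */
static int32_t smccc_arch_features(uint32_t query_fid)
{
	(void)query_fid;
	/* A statically mitigated system, as implemented by this patch,
	 * reports that no dynamic workaround call is required. */
	return SMCCC_ARCH_NOT_REQUIRED;
}

int main(void)
{
	int32_t ret = smccc_arch_features(SMCCC_ARCH_WORKAROUND_2);

	if (ret == SMCCC_ARCH_NOT_REQUIRED)
		puts("CVE-2018-3639: unaffected or permanently mitigated");
	else if (ret >= 0)
		puts("CVE-2018-3639: dynamic mitigation via SMCCC_ARCH_WORKAROUND_2");
	else
		puts("CVE-2018-3639: discovery unsupported; assume unmitigated");
	return 0;
}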
Change-Id: I084aa7c3bc7c26bf2df2248301270f77bed22ceb
Signed-off-by: Dimitris Papastamos <[email protected]>
with the recommendation in the spec regarding workaround discovery.
Defaults to 1.
+- ``WORKAROUND_CVE_2018_3639``: Enables the security workaround for
+  `CVE-2018-3639`_. Defaults to 1. The TF-A project recommends keeping
+ the default value of 1 even on platforms that are unaffected by
+ CVE-2018-3639, in order to comply with the recommendation in the spec
+ regarding workaround discovery.
+
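For example, a platform build that keeps the default (``fvp`` below is
only an illustrative platform name) would be invoked as:

    make PLAT=fvp WORKAROUND_CVE_2018_3639=1 all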
CPU Errata Workarounds
----------------------
#define CORTEX_A57_CPUACTLR_EL1 S3_1_C15_C2_0
#define CORTEX_A57_CPUACTLR_EL1_DIS_LOAD_PASS_DMB (ULL(1) << 59)
+#define CORTEX_A57_CPUACTLR_EL1_DIS_LOAD_PASS_STORE (ULL(1) << 55)
#define CORTEX_A57_CPUACTLR_EL1_GRE_NGRE_AS_NGNRE (ULL(1) << 54)
#define CORTEX_A57_CPUACTLR_EL1_DIS_OVERREAD (ULL(1) << 52)
#define CORTEX_A57_CPUACTLR_EL1_NO_ALLOC_WBWA (ULL(1) << 49)
#define CORTEX_A72_CPUACTLR_EL1 S3_1_C15_C2_0
#define CORTEX_A72_CPUACTLR_EL1_DISABLE_L1_DCACHE_HW_PFTCH (ULL(1) << 56)
+#define CORTEX_A72_CPUACTLR_EL1_DIS_LOAD_PASS_STORE (ULL(1) << 55)
#define CORTEX_A72_CPUACTLR_EL1_NO_ALLOC_WBWA (ULL(1) << 49)
#define CORTEX_A72_CPUACTLR_EL1_DCC_AS_DCCI (ULL(1) << 44)
#define CORTEX_A72_CPUACTLR_EL1_DIS_INSTR_PREFETCH (ULL(1) << 32)
******************************************************************************/
#define CORTEX_A73_L2MERRSR_EL1 S3_1_C15_C2_3 /* Instruction def. */
+/*******************************************************************************
+ * CPU implementation defined register specific definitions.
+ ******************************************************************************/
+#define CORTEX_A73_IMP_DEF_REG1 S3_0_C15_C0_0
+
+#define CORTEX_A73_IMP_DEF_REG1_DISABLE_LOAD_PASS_STORE (1 << 3)
+
#endif /* __CORTEX_A73_H__ */
#define CORTEX_A75_CPUPWRCTLR_EL1 S3_0_C15_C2_7
#define CORTEX_A75_CPUECTLR_EL1 S3_0_C15_C1_4
+/*******************************************************************************
+ * CPU Auxiliary Control register specific definitions.
+ ******************************************************************************/
+#define CORTEX_A75_CPUACTLR_EL1 S3_0_C15_C1_0
+
+#define CORTEX_A75_CPUACTLR_EL1_DISABLE_LOAD_PASS_STORE (ULL(1) << 35)
+
/* Definitions of register field mask in CORTEX_A75_CPUPWRCTLR_EL1 */
#define CORTEX_A75_CORE_PWRDN_EN_MASK 0x1
#define SMCCC_VERSION U(0x80000000)
#define SMCCC_ARCH_FEATURES U(0x80000001)
#define SMCCC_ARCH_WORKAROUND_1 U(0x80008000)
+#define SMCCC_ARCH_WORKAROUND_2 U(0x80007FFF)
+
+#define SMCCC_ARCH_NOT_REQUIRED (-2)
#endif /* __ARM_ARCH_SVC_H__ */
ret
endfunc check_errata_cve_2017_5715
+func check_errata_cve_2018_3639
+#if WORKAROUND_CVE_2018_3639
+ mov x0, #ERRATA_APPLIES
+#else
+ mov x0, #ERRATA_MISSING
+#endif
+ ret
+endfunc check_errata_cve_2018_3639
+
/* -------------------------------------------------
* The CPU Ops reset function for Cortex-A57.
* Shall clobber: x0-x19
msr vbar_el3, x0
#endif
+#if WORKAROUND_CVE_2018_3639
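+	/* Set bit 55 (Disable load pass store) to mitigate CVE-2018-3639 */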
+ mrs x0, CORTEX_A57_CPUACTLR_EL1
+ orr x0, x0, #CORTEX_A57_CPUACTLR_EL1_DIS_LOAD_PASS_STORE
+ msr CORTEX_A57_CPUACTLR_EL1, x0
+ isb
+ dsb sy
+#endif
+
/* ---------------------------------------------
* Enable the SMP bit.
* ---------------------------------------------
report_errata ERRATA_A57_833471, cortex_a57, 833471
report_errata ERRATA_A57_859972, cortex_a57, 859972
report_errata WORKAROUND_CVE_2017_5715, cortex_a57, cve_2017_5715
+ report_errata WORKAROUND_CVE_2018_3639, cortex_a57, cve_2018_3639
ldp x8, x30, [sp], #16
ret
ret
endfunc check_errata_cve_2017_5715
+func check_errata_cve_2018_3639
+#if WORKAROUND_CVE_2018_3639
+ mov x0, #ERRATA_APPLIES
+#else
+ mov x0, #ERRATA_MISSING
+#endif
+ ret
+endfunc check_errata_cve_2018_3639
+
/* -------------------------------------------------
* The CPU Ops reset function for Cortex-A72.
* -------------------------------------------------
1:
#endif
+#if WORKAROUND_CVE_2018_3639
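+	/* Set bit 55 (Disable load pass store) to mitigate CVE-2018-3639 */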
+ mrs x0, CORTEX_A72_CPUACTLR_EL1
+ orr x0, x0, #CORTEX_A72_CPUACTLR_EL1_DIS_LOAD_PASS_STORE
+ msr CORTEX_A72_CPUACTLR_EL1, x0
+ isb
+ dsb sy
+#endif
+
/* ---------------------------------------------
* Enable the SMP bit.
* ---------------------------------------------
*/
report_errata ERRATA_A72_859971, cortex_a72, 859971
report_errata WORKAROUND_CVE_2017_5715, cortex_a72, cve_2017_5715
+ report_errata WORKAROUND_CVE_2018_3639, cortex_a72, cve_2018_3639
ldp x8, x30, [sp], #16
ret
1:
#endif
+#if WORKAROUND_CVE_2018_3639
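+	/* Set bit 3 of S3_0_C15_C0_0 (not documented in the TRM) to
+	 * mitigate CVE-2018-3639 */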
+ mrs x0, CORTEX_A73_IMP_DEF_REG1
+ orr x0, x0, #CORTEX_A73_IMP_DEF_REG1_DISABLE_LOAD_PASS_STORE
+ msr CORTEX_A73_IMP_DEF_REG1, x0
+ isb
+#endif
+
/* ---------------------------------------------
* Enable the SMP bit.
* Clobbers : x0
ret
endfunc check_errata_cve_2017_5715
+func check_errata_cve_2018_3639
+#if WORKAROUND_CVE_2018_3639
+ mov x0, #ERRATA_APPLIES
+#else
+ mov x0, #ERRATA_MISSING
+#endif
+ ret
+endfunc check_errata_cve_2018_3639
+
#if REPORT_ERRATA
/*
 * Errata printing function for Cortex A73. Must follow AAPCS.
* checking functions of each errata.
*/
report_errata WORKAROUND_CVE_2017_5715, cortex_a73, cve_2017_5715
+ report_errata WORKAROUND_CVE_2018_3639, cortex_a73, cve_2018_3639
ldp x8, x30, [sp], #16
ret
1:
#endif
+#if WORKAROUND_CVE_2018_3639
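+	/* Set bit 35 (reserved in TRM) of CPUACTLR_EL1 to mitigate
+	 * CVE-2018-3639 */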
+ mrs x0, CORTEX_A75_CPUACTLR_EL1
+ orr x0, x0, #CORTEX_A75_CPUACTLR_EL1_DISABLE_LOAD_PASS_STORE
+ msr CORTEX_A75_CPUACTLR_EL1, x0
+ isb
+#endif
+
#if ENABLE_AMU
/* Make sure accesses from EL0/EL1 and EL2 are not trapped to EL3 */
mrs x0, actlr_el3
ret
endfunc check_errata_cve_2017_5715
+func check_errata_cve_2018_3639
+#if WORKAROUND_CVE_2018_3639
+ mov x0, #ERRATA_APPLIES
+#else
+ mov x0, #ERRATA_MISSING
+#endif
+ ret
+endfunc check_errata_cve_2018_3639
+
/* ---------------------------------------------
* HW will do the cache maintenance while powering down
* ---------------------------------------------
* checking functions of each errata.
*/
report_errata WORKAROUND_CVE_2017_5715, cortex_a75, cve_2017_5715
+ report_errata WORKAROUND_CVE_2018_3639, cortex_a75, cve_2018_3639
ldp x8, x30, [sp], #16
ret
A57_DISABLE_NON_TEMPORAL_HINT ?=1
WORKAROUND_CVE_2017_5715 ?=1
+WORKAROUND_CVE_2018_3639 ?=1
# Process SKIP_A57_L1_FLUSH_PWR_DWN flag
$(eval $(call assert_boolean,SKIP_A57_L1_FLUSH_PWR_DWN))
$(eval $(call assert_boolean,WORKAROUND_CVE_2017_5715))
$(eval $(call add_define,WORKAROUND_CVE_2017_5715))
+# Process WORKAROUND_CVE_2018_3639 flag
+$(eval $(call assert_boolean,WORKAROUND_CVE_2018_3639))
+$(eval $(call add_define,WORKAROUND_CVE_2018_3639))
+
# CPU Errata Build flags.
# These should be enabled by the platform if the erratum workaround needs to be
# applied.
if (check_wa_cve_2017_5715() == ERRATA_NOT_APPLIES)
return 1;
return 0; /* ERRATA_APPLIES || ERRATA_MISSING */
+#endif
+#if WORKAROUND_CVE_2018_3639
+ case SMCCC_ARCH_WORKAROUND_2:
+ return SMCCC_ARCH_NOT_REQUIRED;
#endif
default:
return SMC_UNK;
* has no effect.
*/
SMC_RET0(handle);
+#endif
+#if WORKAROUND_CVE_2018_3639
+ case SMCCC_ARCH_WORKAROUND_2:
+ /*
+ * The workaround has already been applied on affected PEs
+	 * during EL3 initialization, following every PE reset.
+	 * On unaffected PEs, this function has no effect.
+ */
+ SMC_RET0(handle);
#endif
default:
WARN("Unimplemented Arm Architecture Service Call: 0x%x \n",